import numpy as np
import tensorflow.compat.v2 as tf
tf.enable_v2_behavior()
import pandas as pd
from tensorflow import keras
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import RobustScaler
from sklearn.preprocessing import MinMaxScaler
from matplotlib import pyplot
import plotly.graph_objects as go
import math
import seaborn as sns
from sklearn.metrics import mean_squared_error
np.random.seed(1)
tf.random.set_seed(1)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LSTM, GRU, Dropout, RepeatVector, TimeDistributed
from keras import backend
# --- Experiment configuration: GRU model, 1-day window, tuned hyperparameters ---
MODELFILENAME = 'MODELS/GRU_1d_TFM_2c'  # output path (without extension) for the saved model
TIME_STEPS=144 # 1d window: 144 steps (presumably 10-minute samples -- TODO confirm sampling rate)
CMODEL = GRU    # recurrent layer class used to build the network
MODEL = "GRU"   # human-readable model name used in printed reports
UNITS=43        # units per recurrent layer
DROPOUT1=0.405  # dropout rate after the first recurrent layer
DROPOUT2=0.331  # dropout rate after the second recurrent layer
ACTIVATION='tanh'     # activation of the final per-timestep Dense head
OPTIMIZER='adadelta'  # Keras optimizer (referenced by name)
EPOCHS=56
BATCHSIZE=11
VALIDATIONSPLIT=0.1   # fraction of the training set held out for validation
# Code to read csv file into Colaboratory:
# from google.colab import files
# uploaded = files.upload()
# import io
# df = pd.read_csv(io.BytesIO(uploaded['SentDATA.csv']))
# Dataset is now stored in a Pandas Dataframe
# Load the sensor dataset and index it by timestamp.
df = pd.read_csv('../../data/dadesTFM.csv')
df.reset_index(inplace=True)
df['Time'] = pd.to_datetime(df['Time'])
df = df.set_index('Time')

# Short column names used throughout the rest of the script.
columns = ['PM1','PM25','PM10','PM1ATM','PM25ATM','PM10ATM']

# Mapping from raw CSV column names to those short names.
RENAME_MAP = {"PM 1": "PM1", "PM 2.5": "PM25", "PM 10": "PM10",
              "PM 1 ATM": "PM1ATM", "PM 2.5 ATM": "PM25ATM", "PM 10 ATM": "PM10ATM"}

df1 = df.rename(columns=RENAME_MAP)
# Cast every PM column to float32 in one loop (replaces six copy-pasted assignments).
for raw_name, short_name in RENAME_MAP.items():
    df1[short_name] = df[raw_name].astype(np.float32)

df2 = df1.copy()
# Chronological 80/20 train/test split -- no shuffling, this is a time series.
train_size = int(len(df2) * 0.8)
test_size = len(df2) - train_size
train, test = df2.iloc[0:train_size], df2.iloc[train_size:len(df2)]
# Original run: train.shape == (3117, 7), test.shape == (780, 7).
# ((3117, 7), (780, 7))  <- pasted notebook cell output of (train.shape, test.shape), not code
# Standardize the training data, fitting one scaler per column.
# Explicit copy: `train` is a slice of df2, so assigning into it raised
# pandas' SettingWithCopyWarning; copying first makes the write unambiguous.
train = train.copy()
for col in columns:
    scaler = StandardScaler()
    train.loc[:, col] = scaler.fit_transform(train[[col]])
# [notebook output removed] Six repeated pandas SettingWithCopyWarning messages
# triggered by `train[col] = scaler.fit_transform(train[[col]])` on a DataFrame
# slice; see the pandas "returning a view versus a copy" documentation:
# https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
def create_sequences(X, y, time_steps=TIME_STEPS):
    """Slice X/y into overlapping supervised-learning windows.

    Returns (Xs, ys) as numpy arrays, where Xs[i] holds `time_steps`
    consecutive rows of X and ys[i] is the y value immediately after
    that window.
    """
    n_windows = len(X) - time_steps
    Xs = [X.iloc[start:(start + time_steps)].values for start in range(n_windows)]
    ys = [y.iloc[start + time_steps] for start in range(n_windows)]
    return np.array(Xs), np.array(ys)
# Build the training windows from the PM2.5 column only (columns[1]):
# the model is univariate, one feature per time step.
X_train, y_train = create_sequences(train[[columns[1]]], train[columns[1]])
#X_test, y_test = create_sequences(test[[columns[1]]], test[columns[1]])
print(f'X_train shape: {X_train.shape}')
print(f'y_train shape: {y_train.shape}')
# X_train shape: (2973, 144, 1); y_train shape: (2973,)  <- pasted notebook cell output, not code
# Add a custom RMSE metric (Keras tracks MAE loss and MSE; this adds RMSE).
def rmse(y_true, y_pred):
    """Root-mean-squared error computed with Keras backend ops."""
    squared_diff = backend.square(y_pred - y_true)
    return backend.sqrt(backend.mean(squared_diff, axis=-1))
# Two stacked recurrent layers (CMODEL = GRU here) with dropout after each,
# followed by a per-timestep Dense head; compiled with MAE loss and
# MSE/RMSE as extra tracked metrics.
model = Sequential([
    CMODEL(units=UNITS, return_sequences=True,
           input_shape=(X_train.shape[1], X_train.shape[2])),
    Dropout(rate=DROPOUT1),
    CMODEL(units=UNITS, return_sequences=True),
    Dropout(rate=DROPOUT2),
    TimeDistributed(Dense(1, kernel_initializer='normal', activation=ACTIVATION)),
])
model.compile(optimizer=OPTIMIZER, loss='mae', metrics=['mse', rmse])
model.summary()
# [notebook output removed] model.summary() of the original run:
#   gru (GRU)                  (None, 144, 43)  5,934 params
#   dropout (Dropout)          (None, 144, 43)  0
#   gru_1 (GRU)                (None, 144, 43)  11,352 params
#   dropout_1 (Dropout)        (None, 144, 43)  0
#   time_distributed (Dense)   (None, 144, 1)   44 params
#   Total params: 17,330 (all trainable)
# Train with early stopping on validation loss (patience 5, minimizing).
# shuffle=False preserves chronological order of the time-series windows.
history = model.fit(X_train, y_train, epochs=EPOCHS, batch_size=BATCHSIZE, validation_split=VALIDATIONSPLIT,
callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, mode='min')], shuffle=False)
# [notebook output removed] 56-epoch training log from the original run.
# Losses decreased monotonically; early stopping never triggered:
#   epoch  1: loss 0.7906, mse 1.0029, rmse 0.7923, val_loss 0.8740, val_mse 0.8011, val_rmse 0.8741
#   epoch 56: loss 0.6893, mse 0.8117, rmse 0.7161, val_loss 0.5765, val_mse 0.3623, val_rmse 0.5822
import matplotlib.pyplot as plt

# Plot training vs. validation curves for every metric tracked by fit().
history_curves = [
    ('loss', 'MAE Training loss'),
    ('val_loss', 'MAE Validation loss'),
    ('mse', 'MSE Training loss'),
    ('val_mse', 'MSE Validation loss'),
    ('rmse', 'RMSE Training loss'),
    ('val_rmse', 'RMSE Validation loss'),
]
for metric_key, curve_label in history_curves:
    plt.plot(history.history[metric_key], label=curve_label)
plt.legend();
# In-sample prediction and per-window MAE, used to inspect the
# reconstruction-error distribution on the training set.
# NOTE(review): the error is measured against the *input* windows X_train
# (not y_train), i.e. a reconstruction-style error -- confirm intended.
X_train_pred = model.predict(X_train, verbose=0)
train_mae_loss = np.mean(np.abs(X_train_pred - X_train), axis=1)
plt.hist(train_mae_loss, bins=50)
plt.xlabel('Train MAE loss')
plt.ylabel('Number of Samples');
def evaluate_prediction(predictions, actual, model_name):
    """Print MAE, RMSE and MSE of `predictions` vs `actual`; return (mae, rmse, mse)."""
    diff = predictions - actual
    mse = np.mean(diff ** 2)
    rmse = np.sqrt(mse)
    mae = np.mean(np.abs(diff))
    print(model_name + ':')
    print('Mean Absolute Error: {:.4f}'.format(mae))
    print('Root Mean Square Error: {:.4f}'.format(rmse))
    print('Mean Square Error: {:.4f}'.format(mse))
    print('')
    return mae,rmse,mse
# Report in-sample error metrics, then persist the trained model.
# NOTE: the middle return value is bound to `rmse_train`, NOT `rmse` --
# the original code rebound `rmse`, clobbering the Keras metric function
# defined earlier in this file.
mae, rmse_train, mse = evaluate_prediction(X_train_pred, X_train, MODEL)
# Original run: MAE 0.5468, RMSE 0.7323, MSE 0.5363 (pasted notebook output).
model.save(MODELFILENAME + '.h5')
# Compute the anomaly threshold from the test reconstruction errors.
def calculate_threshold(X_test, X_test_pred, cutoff=0.9):
    """Return the anomaly threshold for a set of test windows.

    The per-sample RMSE between prediction and actual is sorted and the
    value at the `cutoff` quantile (default 0.9) is taken as the threshold.
    (The original code's comment said 0.80 but the implementation used 0.9;
    the cutoff is now an explicit, backward-compatible parameter.)
    """
    distance = np.sqrt(np.mean(np.square(X_test_pred - X_test), axis=1))
    distance.sort()
    # Clamp so cutoff=1.0 still indexes the last element instead of raising.
    cut_off = min(int(cutoff * len(distance)), len(distance) - 1)
    return distance[cut_off]
# Evaluate the trained model and detect anomalies, one PM column at a time.
for col in columns:
    print ("####################### "+col +" ###########################")
    # Standardize the test data for this column.
    # NOTE(review): the scaler is fit on the test split itself rather than
    # reusing the scaler fitted on the training split -- confirm intended.
    scaler = StandardScaler()
    test = test.copy()  # explicit copy avoids pandas' SettingWithCopyWarning
    test[col] = scaler.fit_transform(test[[col]])
    # Build windowed sequences for this column.
    X_test1, y_test1 = create_sequences(test[[col]], test[col])
    print(f'Testing shape: {X_test1.shape}')
    # Keras evaluation (loss + compiled metrics). Renamed from `eval`,
    # which shadowed the builtin.
    eval_results = model.evaluate(X_test1, y_test1)
    print("evaluate: ", eval_results)
    # Predict and report MAE/RMSE/MSE against the input windows.
    X_test1_pred = model.predict(X_test1, verbose=0)
    evaluate_prediction(X_test1_pred, X_test1, MODEL)
    # Per-window RMSE used as the anomaly score.
    test1_rmse_loss = np.sqrt(np.mean(np.square(X_test1_pred - X_test1), axis=1))
    # Flatten (windows, steps, 1) -> (windows*steps, 1) for the threshold.
    X_test1_predReshape = X_test1_pred.reshape((X_test1_pred.shape[0] * X_test1_pred.shape[1]), X_test1_pred.shape[2])
    X_test1Reshape = X_test1.reshape((X_test1.shape[0] * X_test1.shape[1]), X_test1.shape[2])
    threshold_test = calculate_threshold(X_test1Reshape, X_test1_predReshape)
    # Score dataframe: one row per window end, flagged when loss > threshold.
    test1_score_df = pd.DataFrame(test[TIME_STEPS:])
    test1_score_df['loss'] = test1_rmse_loss.reshape((-1))
    test1_score_df['threshold'] = threshold_test
    test1_score_df['anomaly'] = test1_score_df['loss'] > test1_score_df['threshold']
    test1_score_df[col] = test[TIME_STEPS:][col]
    # Plot test loss against the threshold.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['loss'], name='Test loss'))
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['threshold'], name='Threshold'))
    fig.update_layout(showlegend=True, title='Test loss vs. Threshold')
    fig.show()
    # Collect the flagged anomalies.
    anomalies1 = test1_score_df.loc[test1_score_df['anomaly'] == True]
    print('anomalies: ', anomalies1.shape)
    print()
    # Plot the inverse-transformed signal with anomalies marked, to verify
    # that the standardization does not distort the data.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=scaler.inverse_transform(test1_score_df[col]), name=col))
    fig.add_trace(go.Scatter(x=anomalies1.index, y=scaler.inverse_transform(anomalies1[col]), mode='markers', name='Anomaly'))
    fig.update_layout(showlegend=True, title='Detected anomalies')
    fig.show()
    print ("######################################################")
# [notebook output removed] Per-column evaluation results from the original run
# (plus repeated SettingWithCopyWarning messages, fixed in the loop above):
#   PM1:     evaluate loss 0.6725, mse 0.9362, rmse 0.7076; MAE 0.4624, RMSE 0.6987, MSE 0.4882; anomalies (211, 10)
#   PM25:    evaluate loss 0.7081, mse 1.0160, rmse 0.7433; MAE 0.4850, RMSE 0.7003, MSE 0.4904; anomalies (54, 10)
#   PM10:    evaluate loss 0.7355, mse 1.0675, rmse 0.7706; MAE 0.5056, RMSE 0.7012, MSE 0.4916; anomalies (25, 10)
#   PM1ATM:  evaluate loss 0.7379, mse 0.9711, rmse 0.7737; MAE 0.5136, RMSE 0.7015, MSE 0.4921; anomalies (0, 10)
#   PM25ATM: evaluate loss 0.7269, mse 0.9544, rmse 0.7629; MAE 0.5064, RMSE 0.6989, MSE 0.4884; anomalies (0, 10)
#   PM10ATM: evaluate loss 0.7209, mse 0.9636, rmse 0.7556; MAE 0.4984, RMSE 0.6967, MSE 0.4853; anomalies (0, 10)
#   Testing shape was (636, 144, 1) for every column.